https://cran.r-project.org/web/packages/rstanarm/index.html

0.1 Intro to stan

# Load rstan, the R interface to Stan (Bayesian inference via MCMC/NUTS).
library(rstan)
## Loading required package: ggplot2
## Loading required package: StanHeaders
## rstan (Version 2.18.2, GitRev: 2e1f913d3ca3)
## For execution on a local, multicore CPU with excess RAM we recommend calling
## options(mc.cores = parallel::detectCores()).
## To avoid recompilation of unchanged Stan programs, we recommend calling
## rstan_options(auto_write = TRUE)
# bayesplot provides ggplot2-based plotting for posterior draws and
# posterior predictive checks (used in the PPC section below).
library(bayesplot)
## This is bayesplot version 1.6.0
## - Online documentation and vignettes at mc-stan.org/bayesplot
## - bayesplot theme set to bayesplot::theme_default()
##    * Does _not_ affect other ggplot2 plots
##    * See ?bayesplot_theme_set for details on theme setting
# NOTE: tidyr::extract() masks rstan::extract() (see the conflicts below),
# which is why later calls are written as rstan::extract().
library(tidyverse)
## ── Attaching packages ───────────── tidyverse 1.2.1 ──
## ✔ tibble  2.0.0     ✔ purrr   0.2.5
## ✔ tidyr   0.8.2     ✔ dplyr   0.7.8
## ✔ readr   1.3.1     ✔ stringr 1.3.1
## ✔ tibble  2.0.0     ✔ forcats 0.3.0
## ── Conflicts ──────────────── tidyverse_conflicts() ──
## ✖ tidyr::extract() masks rstan::extract()
## ✖ dplyr::filter()  masks stats::filter()
## ✖ dplyr::lag()     masks stats::lag()
# Run MCMC chains in parallel, one per available CPU core.
options(mc.cores = parallel::detectCores())
# Cache compiled Stan programs on disk so unchanged models are not recompiled.
rstan_options(auto_write = TRUE)

0.1.1 Simple example (stan script -> stan_model -> sampling)

# Minimal Stan program: one parameter y with a normal(y_mean, 1) distribution,
# where y_mean is passed in as data. The sampled "posterior" for y is therefore
# just normal(y_mean, 1) -- a sanity check that the Stan machinery works.
stancode <- 'data {real y_mean;} 
              parameters {real y;} 
              model {y ~ normal(y_mean,1);}'
# Compile the program once to C++; the compiled model object is reused below.
mod <- stan_model(model_code = stancode)
# Sample with y_mean = 0 (rstan defaults: 4 chains, 2000 iterations, half warmup).
simple.fit <- sampling(mod, data = list(y_mean = 0))
## 
## SAMPLING FOR MODEL '5a0eac41591944c7d7b299fda75d726b' NOW (CHAIN 1).
## Chain 1: 
## Chain 1: Gradient evaluation took 5e-06 seconds
## Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.05 seconds.
## Chain 1: Adjust your expectations accordingly!
## Chain 1: 
## Chain 1: 
## Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
## Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
## Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
## Chain 1: Iteration:  600 / 2000 [ 30%]  (Warmup)
## Chain 1: Iteration:  800 / 2000 [ 40%]  (Warmup)
## Chain 1: Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Chain 1: Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Chain 1: Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Chain 1: Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Chain 1: Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Chain 1: Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
## Chain 1: 
## Chain 1:  Elapsed Time: 0.00638 seconds (Warm-up)
## Chain 1:                0.00696 seconds (Sampling)
## Chain 1:                0.01334 seconds (Total)
## Chain 1: 
## 
## SAMPLING FOR MODEL '5a0eac41591944c7d7b299fda75d726b' NOW (CHAIN 2).
## Chain 2: 
## Chain 2: Gradient evaluation took 2e-06 seconds
## Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.02 seconds.
## Chain 2: Adjust your expectations accordingly!
## Chain 2: 
## Chain 2: 
## Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
## Chain 2: Iteration:  200 / 2000 [ 10%]  (Warmup)
## Chain 2: Iteration:  400 / 2000 [ 20%]  (Warmup)
## Chain 2: Iteration:  600 / 2000 [ 30%]  (Warmup)
## Chain 2: Iteration:  800 / 2000 [ 40%]  (Warmup)
## Chain 2: Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Chain 2: Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Chain 2: Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Chain 2: Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Chain 2: Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Chain 2: Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
## Chain 2: 
## Chain 2:  Elapsed Time: 0.006298 seconds (Warm-up)
## Chain 2:                0.005956 seconds (Sampling)
## Chain 2:                0.012254 seconds (Total)
## Chain 2: 
## 
## SAMPLING FOR MODEL '5a0eac41591944c7d7b299fda75d726b' NOW (CHAIN 3).
## Chain 3: 
## Chain 3: Gradient evaluation took 3e-06 seconds
## Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.03 seconds.
## Chain 3: Adjust your expectations accordingly!
## Chain 3: 
## Chain 3: 
## Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
## Chain 3: Iteration:  200 / 2000 [ 10%]  (Warmup)
## Chain 3: Iteration:  400 / 2000 [ 20%]  (Warmup)
## Chain 3: Iteration:  600 / 2000 [ 30%]  (Warmup)
## Chain 3: Iteration:  800 / 2000 [ 40%]  (Warmup)
## Chain 3: Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Chain 3: Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Chain 3: Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Chain 3: Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Chain 3: Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Chain 3: Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
## Chain 3: 
## Chain 3:  Elapsed Time: 0.006316 seconds (Warm-up)
## Chain 3:                0.0063 seconds (Sampling)
## Chain 3:                0.012616 seconds (Total)
## Chain 3: 
## 
## SAMPLING FOR MODEL '5a0eac41591944c7d7b299fda75d726b' NOW (CHAIN 4).
## Chain 4: 
## Chain 4: Gradient evaluation took 2e-06 seconds
## Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 0.02 seconds.
## Chain 4: Adjust your expectations accordingly!
## Chain 4: 
## Chain 4: 
## Chain 4: Iteration:    1 / 2000 [  0%]  (Warmup)
## Chain 4: Iteration:  200 / 2000 [ 10%]  (Warmup)
## Chain 4: Iteration:  400 / 2000 [ 20%]  (Warmup)
## Chain 4: Iteration:  600 / 2000 [ 30%]  (Warmup)
## Chain 4: Iteration:  800 / 2000 [ 40%]  (Warmup)
## Chain 4: Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Chain 4: Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Chain 4: Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Chain 4: Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Chain 4: Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Chain 4: Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Chain 4: Iteration: 2000 / 2000 [100%]  (Sampling)
## Chain 4: 
## Chain 4:  Elapsed Time: 0.006213 seconds (Warm-up)
## Chain 4:                0.0071 seconds (Sampling)
## Chain 4:                0.013313 seconds (Total)
## Chain 4:
# Re-run the same compiled model with different data (y_mean = 5);
# no recompilation is needed because the model object is reused.
simple.fit2 <- sampling(mod, data = list(y_mean = 5))
## 
## SAMPLING FOR MODEL '5a0eac41591944c7d7b299fda75d726b' NOW (CHAIN 1).
## Chain 1: 
## Chain 1: Gradient evaluation took 4e-06 seconds
## Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.04 seconds.
## Chain 1: Adjust your expectations accordingly!
## Chain 1: 
## Chain 1: 
## Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
## Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
## Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
## Chain 1: Iteration:  600 / 2000 [ 30%]  (Warmup)
## Chain 1: Iteration:  800 / 2000 [ 40%]  (Warmup)
## Chain 1: Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Chain 1: Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Chain 1: Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Chain 1: Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Chain 1: Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Chain 1: Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
## Chain 1: 
## Chain 1:  Elapsed Time: 0.006387 seconds (Warm-up)
## Chain 1:                0.006576 seconds (Sampling)
## Chain 1:                0.012963 seconds (Total)
## Chain 1: 
## 
## SAMPLING FOR MODEL '5a0eac41591944c7d7b299fda75d726b' NOW (CHAIN 2).
## Chain 2: 
## Chain 2: Gradient evaluation took 3e-06 seconds
## Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.03 seconds.
## Chain 2: Adjust your expectations accordingly!
## Chain 2: 
## Chain 2: 
## Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
## Chain 2: Iteration:  200 / 2000 [ 10%]  (Warmup)
## Chain 2: Iteration:  400 / 2000 [ 20%]  (Warmup)
## Chain 2: Iteration:  600 / 2000 [ 30%]  (Warmup)
## Chain 2: Iteration:  800 / 2000 [ 40%]  (Warmup)
## Chain 2: Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Chain 2: Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Chain 2: Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Chain 2: Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Chain 2: Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Chain 2: Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
## Chain 2: 
## Chain 2:  Elapsed Time: 0.006666 seconds (Warm-up)
## Chain 2:                0.006629 seconds (Sampling)
## Chain 2:                0.013295 seconds (Total)
## Chain 2: 
## 
## SAMPLING FOR MODEL '5a0eac41591944c7d7b299fda75d726b' NOW (CHAIN 3).
## Chain 3: 
## Chain 3: Gradient evaluation took 3e-06 seconds
## Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.03 seconds.
## Chain 3: Adjust your expectations accordingly!
## Chain 3: 
## Chain 3: 
## Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
## Chain 3: Iteration:  200 / 2000 [ 10%]  (Warmup)
## Chain 3: Iteration:  400 / 2000 [ 20%]  (Warmup)
## Chain 3: Iteration:  600 / 2000 [ 30%]  (Warmup)
## Chain 3: Iteration:  800 / 2000 [ 40%]  (Warmup)
## Chain 3: Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Chain 3: Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Chain 3: Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Chain 3: Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Chain 3: Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Chain 3: Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
## Chain 3: 
## Chain 3:  Elapsed Time: 0.00621 seconds (Warm-up)
## Chain 3:                0.007021 seconds (Sampling)
## Chain 3:                0.013231 seconds (Total)
## Chain 3: 
## 
## SAMPLING FOR MODEL '5a0eac41591944c7d7b299fda75d726b' NOW (CHAIN 4).
## Chain 4: 
## Chain 4: Gradient evaluation took 2e-06 seconds
## Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 0.02 seconds.
## Chain 4: Adjust your expectations accordingly!
## Chain 4: 
## Chain 4: 
## Chain 4: Iteration:    1 / 2000 [  0%]  (Warmup)
## Chain 4: Iteration:  200 / 2000 [ 10%]  (Warmup)
## Chain 4: Iteration:  400 / 2000 [ 20%]  (Warmup)
## Chain 4: Iteration:  600 / 2000 [ 30%]  (Warmup)
## Chain 4: Iteration:  800 / 2000 [ 40%]  (Warmup)
## Chain 4: Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Chain 4: Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Chain 4: Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Chain 4: Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Chain 4: Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Chain 4: Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Chain 4: Iteration: 2000 / 2000 [100%]  (Sampling)
## Chain 4: 
## Chain 4:  Elapsed Time: 0.006102 seconds (Warm-up)
## Chain 4:                0.005796 seconds (Sampling)
## Chain 4:                0.011898 seconds (Total)
## Chain 4:

0.1.2 Write a .stan file and run the analysis with stan()

https://ourcodingclub.github.io/2018/04/17/stan-intro.html

Comments are indicated by // in Stan. The write("model code", "file_name") bit allows us to write the Stan model in our R script and output the file to the working directory (or you can set a different file path).

# Write the Stan program for a simple linear regression (y ~ alpha + beta*x,
# with no explicit priors, i.e. flat priors) to a file in the working
# directory. The quoted model text is written verbatim to disk.
write("// Stan model for simple linear regression

data {
 int < lower = 1 > N; // Sample size
 vector[N] x; // Predictor
 vector[N] y; // Outcome
}

parameters {
 real alpha; // Intercept
 real beta; // Slope (regression coefficients)
 real < lower = 0 > sigma; // Error SD
}

model {
 y ~ normal(alpha + x * beta , sigma);
}

generated quantities {
} // The posterior predictive distribution",

"stan_model1.stan")

# stanc("stan_model1.stan")
# (Optional: stanc() above syntax-checks the .stan file without fitting.)

# Path to the Stan file; passed to rstan::stan() below.
stan_model1 <- "stan_model1.stan"

#load data

# Adding stringsAsFactors = F means that numeric variables won't be
# read in as factors/categorical variables
seaice <- read.csv("seaice.csv", stringsAsFactors = F)
head(seaice)
##   year extent_north extent_south
## 1 1979       12.328       11.700
## 2 1980       12.337       11.230
## 3 1981       12.127       11.435
## 4 1982       12.447       11.640
## 5 1983       12.332       11.389
## 6 1984       11.910       11.454
#To explore the answer to that question, first we can make a figure.
plot(extent_north ~ year, pch = 20, data = seaice)

#preparing data for stan
# I() keeps the shifted year as a plain "AsIs" numeric vector; the shift
# re-indexes years so the intercept corresponds to 1978.
x <- I(seaice$year - 1978) #rename the variables and index the years from 1 to 39. 
y <- seaice$extent_north
N <- length(seaice$year)
# Stan matches these list entries to the data block by name (N, x, y).
stan_data <- list(N = N, x = x, y = y)

#fit the model with stan
# 4 chains x 1000 iterations with 500 warmup each -> 2000 post-warmup draws.
stan_model1.fit <- rstan::stan(file = stan_model1, 
            data = stan_data, 
            warmup = 500, iter = 1000, 
            chains = 4, cores = 2, thin = 1)

stan_model1.fit
## Inference for Stan model: stan_model1.
## 4 chains, each with iter=1000; warmup=500; thin=1; 
## post-warmup draws per chain=500, total post-warmup draws=2000.
## 
##        mean se_mean   sd  2.5%   25%   50%   75% 97.5% n_eff Rhat
## alpha 12.55    0.00 0.07 12.41 12.51 12.55 12.60 12.70   935    1
## beta  -0.05    0.00 0.00 -0.06 -0.06 -0.05 -0.05 -0.05  1010    1
## sigma  0.23    0.00 0.03  0.18  0.21  0.23  0.25  0.29  1137    1
## lp__  37.49    0.04 1.17 34.49 36.93 37.80 38.35 38.86   824    1
## 
## Samples were drawn using NUTS(diag_e) at Wed Jan  9 23:52:31 2019.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at 
## convergence, Rhat=1).
#look at the full posterior of our parameters by extracting them from the model object.
#extract() puts the posterior estimates for each parameter into a list.
# The rstan:: prefix is required because tidyr::extract() masks rstan::extract()
# (see the tidyverse conflicts printed at load time).
posterior <- rstan::extract(stan_model1.fit)
str(posterior)
## List of 4
##  $ alpha: num [1:2000(1d)] 12.6 12.5 12.6 12.5 12.7 ...
##   ..- attr(*, "dimnames")=List of 1
##   .. ..$ iterations: NULL
##  $ beta : num [1:2000(1d)] -0.0543 -0.0541 -0.0514 -0.0528 -0.0577 ...
##   ..- attr(*, "dimnames")=List of 1
##   .. ..$ iterations: NULL
##  $ sigma: num [1:2000(1d)] 0.243 0.195 0.201 0.228 0.228 ...
##   ..- attr(*, "dimnames")=List of 1
##   .. ..$ iterations: NULL
##  $ lp__ : num [1:2000(1d)] 38.4 38.4 33.8 38.4 36.1 ...
##   ..- attr(*, "dimnames")=List of 1
##   .. ..$ iterations: NULL
#We can also look at the posterior densities & histograms.
# stan_dens()/stan_hist() plot marginal posteriors for each parameter.
stan_dens(stan_model1.fit)

stan_hist(stan_model1.fit)
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

#we can generate plots which indicate the mean parameter estimates and
#any credible intervals we may be interested in. 
#Note that the 95% credible intervals for the beta and 
#sigma parameters are very small, thus you only see the dots.
plot(stan_model1.fit, show_density = FALSE, ci_level = 0.5, 
     outer_level = 0.95, fill_color = "salmon")
## ci_level: 0.5 (50% intervals)
## outer_level: 0.95 (95% intervals)

#comparing with lm function

# Ordinary least squares fit of the same regression, to compare the
# frequentist point estimates with the Bayesian posterior means.
lm1 <- lm(y ~ x)
summary(lm1)
## 
## Call:
## lm(formula = y ~ x)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.49925 -0.17713  0.04898  0.16923  0.32829 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 12.555888   0.071985   174.4   <2e-16 ***
## x           -0.054574   0.003137   -17.4   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.2205 on 37 degrees of freedom
## Multiple R-squared:  0.8911, Adjusted R-squared:  0.8881 
## F-statistic: 302.7 on 1 and 37 DF,  p-value: < 2.2e-16
# Pull out the lm() point estimates for later comparison plots.
lm_alpha <- summary(lm1)$coeff[1]  # the intercept
lm_beta <- summary(lm1)$coeff[2]  # the slope
lm_sigma <- sigma(lm1)  # the residual error

plot(y ~ x, pch = 20)

# The graphical parameter for line width is `lwd`; the original `lw` is not a
# valid parameter (it triggers a warning and the requested width is ignored).
abline(lm1, col = 2, lty = 2, lwd = 3)
# Overlay the Bayesian fit: posterior-mean intercept and slope.
abline(mean(posterior$alpha), mean(posterior$beta), col = 6, lwd = 2)

The result is identical to the lm output. This is because we are using a simple model, and have put non-informative priors on our parameters.

One way to visualize the variability in our estimation of the regression line is to plot multiple estimates from the posterior.

# gdata is attached here (as in the original tutorial); note the masked
# functions it introduces below. It does not appear to be used by the
# plotting code that follows -- TODO confirm whether it can be dropped.
library(gdata)
## gdata: read.xls support for 'XLS' (Excel 97-2004) files ENABLED.
## 
## gdata: read.xls support for 'XLSX' (Excel 2007+) files ENABLED.
## 
## Attaching package: 'gdata'
## The following objects are masked from 'package:dplyr':
## 
##     combine, first, last
## The following object is masked from 'package:purrr':
## 
##     keep
## The following object is masked from 'package:stats':
## 
##     nobs
## The following object is masked from 'package:utils':
## 
##     object.size
## The following object is masked from 'package:base':
## 
##     startsWith
plot(y ~ x, pch = 20)

# Draw 500 regression lines from individual posterior draws to visualize the
# uncertainty in the fitted relationship.
for (i in 1:500) {
 abline(posterior$alpha[i], posterior$beta[i], col = "gray", lty = 1)
}

# Posterior-mean line on top. Use `lwd` for line width; the original `lw` is
# not a valid graphical parameter and the width setting was silently ignored.
abline(mean(posterior$alpha), mean(posterior$beta), col = 6, lwd = 2)

####################################################################

Let’s try again, but now with more informative priors for the relationship between sea ice and time. We’re going to use normal priors with small standard deviations. If we were to use normal priors with very large standard deviations (say 1000, or 10,000), they would act very similarly to uniform priors.

# Same regression, but now with deliberately narrow (strongly informative)
# normal priors on alpha and beta, to illustrate how priors that conflict
# with the data pull the fit away from the likelihood. The quoted model
# text is written verbatim to disk.
write("// Stan model for simple linear regression

data {
 int < lower = 1 > N; // Sample size
 vector[N] x; // Predictor
 vector[N] y; // Outcome
}

parameters {
 real alpha; // Intercept
 real beta; // Slope (regression coefficients)
 real < lower = 0 > sigma; // Error SD
}

model {
 alpha ~ normal(10, 0.1);
 beta ~ normal(1, 0.1);
 y ~ normal(alpha + x * beta , sigma);
}

generated quantities {}",

"stan_model2.stan")

# Path to the informative-priors model written above.
stan_model2 <- "stan_model2.stan"

# Fit with the same sampler settings as model 1 so the two are comparable.
fit2 <- rstan::stan(stan_model2, data = stan_data, warmup = 500, 
                    iter = 1000, chains = 4, cores = 2, thin = 1)

posterior2 <- rstan::extract(fit2)

plot(y ~ x, pch = 20)

#abline(alpha, beta, col = 4, lty = 2, lw = 2)
# Use `lwd` for line width; the original `lw` is not a valid graphical
# parameter, so the requested widths were silently ignored.
abline(mean(posterior2$alpha), mean(posterior2$beta), col = 3, lwd = 2)
# Flat-prior fit for comparison (col = 36 recycles through the 8-colour
# default palette -- presumably intentional; verify the intended colour).
abline(mean(posterior$alpha), mean(posterior$beta), col = 36, lwd = 3)

This is a common issue in Bayesian modelling: if your prior distributions are very narrow and yet don’t fit your understanding of the system or the distribution of your data, you could run models that do not meaningfully explain variation in your data. However, that isn’t to say that you shouldn’t choose somewhat informative priors; you do want to use previous analyses and your understanding of your study system to inform your model priors and design. You just need to think carefully about each modelling decision you make!

Convergence Diagnostics. For traceplots, we can view them directly from the posterior:

# Trace plots drawn by hand from the extracted draws: well-mixed chains
# should look like stationary "fuzzy caterpillars" with no trends.
plot(posterior$alpha, type = "l")

plot(posterior$beta, type = "l")

plot(posterior$sigma, type = "l")

# rstan's built-in traceplot colours each chain separately.
traceplot(stan_model1.fit)

Example of poor convergence

Try running a model for only 50 iterations and check the traceplots.

# Deliberately under-run sampler (25 warmup / 50 iterations) to demonstrate
# what non-convergence looks like; expect divergence and low-BFMI warnings.
fit_bad <- rstan::stan(stan_model1, data = stan_data, warmup = 25, 
                       iter = 50, chains = 4, cores = 2, thin = 1)
## Warning: There were 8 divergent transitions after warmup. Increasing adapt_delta above 0.8 may help. See
## http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Warning: There were 4 chains where the estimated Bayesian Fraction of Missing Information was low. See
## http://mc-stan.org/misc/warnings.html#bfmi-low
## Warning: Examine the pairs() plot to diagnose sampling problems
posterior_bad <- rstan::extract(fit_bad)
# Traces from the bad fit: expect drifting, poorly mixed chains.
plot(posterior_bad$alpha, type = "l")

plot(posterior_bad$beta, type = "l")

plot(posterior_bad$sigma, type = "l")

Parameter summaries

We can also get summaries of the parameters through the posterior directly. Let’s also plot the non-Bayesian linear model values to make sure our model is doing what we think it is…

# Plot the three marginal posteriors side by side, overlaying the lm()
# point estimates (dashed blue lines) as a sanity check. Save the current
# graphics state and restore it afterwards so the 1x3 layout does not leak
# into any later base-graphics plots.
op <- par(mfrow = c(1,3))

plot(density(posterior$alpha), main = "Alpha")
abline(v = lm_alpha, col = 4, lty = 2)  # lm() intercept

plot(density(posterior$beta), main = "Beta")
abline(v = lm_beta, col = 4, lty = 2)   # lm() slope

plot(density(posterior$sigma), main = "Sigma")
abline(v = lm_sigma, col = 4, lty = 2)  # lm() residual SD

par(op)  # restore the previous plotting layout

From the posterior we can directly calculate the probability of any parameter being over or under a certain value of interest.

# Posterior draws make tail probabilities a simple proportion of draws.
#Probability that beta is >0:

sum(posterior$beta>0)/length(posterior$beta)
## [1] 0
#Probability that beta is >0.2:

sum(posterior$beta>0.2)/length(posterior$beta)
## [1] 0

Posterior Predictive Checks

For prediction and as another form of model diagnostic, Stan can use random number generators to generate predicted values for each data point, at each iteration. This way we can generate predictions that also represent the uncertainties in our model and our data generation process. We generate these using the Generated Quantities block. This block can be used to get any other information we want about the posterior, or make predictions for new data.

Note that vectorization is not supported in the GQ (generated quantities) block, so we have to put it in a loop. But since this is compiled to C++, loops are actually quite fast and Stan only evaluates the GQ block once per iteration, so it won’t add too much time to your sampling. Typically, the data generating functions will be the distributions you used in the model block but with an _rng suffix. (Double-check in the Stan manual to see which sampling statements have corresponding rng functions already coded up.)

# Same flat-prior regression, now with a generated quantities block that
# draws one replicated dataset y_rep per posterior iteration using
# normal_rng -- the basis for the posterior predictive checks below.
# The quoted model text is written verbatim to disk.
write("// Stan model for simple linear regression

data {
 int < lower = 1 > N; // Sample size
 vector[N] x; // Predictor
 vector[N] y; // Outcome
}

parameters {
 real alpha; // Intercept
 real beta; // Slope (regression coefficients)
 real < lower = 0 > sigma; // Error SD
}

model {
 y ~ normal(x * beta + alpha, sigma);
}

generated quantities {
 real y_rep[N];

 for (n in 1:N) {
 y_rep[n] = normal_rng(x[n] * beta + alpha, sigma);
 }

}",

"stan_model2_GQ.stan")

# Path to the GQ model file used below.
stan_model2_GQ <- "stan_model2_GQ.stan"

# Fit the GQ model. Qualify with rstan:: for consistency with the earlier
# fits (tidyr masks rstan::extract at load time, so qualified rstan calls
# are the convention in this document). Default warmup = iter/2 = 500.
fit3 <- rstan::stan(stan_model2_GQ, data = stan_data, iter = 1000, chains = 4, cores = 2, thin = 1)

#Extracting the y_rep values from posterior.
#Each row is an iteration (single posterior estimate) from the model.
y_rep <- as.matrix(fit3, pars = "y_rep")
dim(y_rep)
## [1] 2000   39
#Comparing density of y with densities of y over 200 posterior draws.

bayesplot::ppc_dens_overlay(y, y_rep[1:200, ])

#We can also use this to compare estimates of summary statistics.
bayesplot::ppc_stat(y = y, yrep = y_rep, stat = "mean")
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

#We can investigate mean posterior prediction per datapoint vs 
#the observed value for each datapoint (default line is 1:1)

bayesplot::ppc_scatter_avg(y = y, yrep = y_rep)

#Here is a list of currently available plots (bayesplot 1.2):
#available_ppc()
#can change the colour scheme in bayesplot too:
bayesplot::color_scheme_view(c("blue", "gray", "green", "pink", "purple",
 "red","teal","yellow"))

#And you can even mix them:

# Qualified with bayesplot:: for consistency with the calls above.
bayesplot::color_scheme_view("mix-blue-red")

#You can set color schemes with:

bayesplot::color_scheme_set("blue")

0.1.3 rats example (stan chunk -> sampling)

// Hierarchical growth-curve model for the classic BUGS "rats" example:
// each rat n gets its own intercept alpha[n] and slope beta[n], drawn from
// shared normal population distributions; x is centered at xbar so alpha[n]
// is the expected weight at the mean measurement day.
data {
  int<lower=0> N;
  int<lower=0> T;
  real x[T];
  real y[N,T];
  real xbar;
}
parameters {
  real alpha[N];
  real beta[N];

  real mu_alpha;
  real mu_beta;          // beta.c in original bugs model

  real<lower=0> sigmasq_y;
  real<lower=0> sigmasq_alpha;
  real<lower=0> sigmasq_beta;
}
transformed parameters {
  // Sampling is done on the variance scale (to match the BUGS inverse-gamma
  // priors); the SDs used in the likelihood are derived here.
  real<lower=0> sigma_y;       // sigma in original bugs model
  real<lower=0> sigma_alpha;
  real<lower=0> sigma_beta;

  sigma_y = sqrt(sigmasq_y);
  sigma_alpha = sqrt(sigmasq_alpha);
  sigma_beta =  sqrt(sigmasq_beta);
}
model {
  mu_alpha ~ normal(0, 100);
  mu_beta ~ normal(0, 100);
  sigmasq_y ~ inv_gamma(0.001, 0.001);
  sigmasq_alpha ~ inv_gamma(0.001, 0.001);
  sigmasq_beta ~ inv_gamma(0.001, 0.001);
  alpha ~ normal(mu_alpha, sigma_alpha); // vectorized
  beta ~ normal(mu_beta, sigma_beta);  // vectorized
  for (n in 1:N)
    for (t in 1:T) 
      y[n,t] ~ normal(alpha[n] + beta[n] * (x[t] - xbar), sigma_y);

}
generated quantities {
  // Population intercept re-expressed at day 0 (un-centered scale).
  real alpha0;
  alpha0 = mu_alpha - xbar * mu_beta;
}
# Read the rats weight data: one row per rat, one column per measurement day.
df <- read_delim("rats.txt", delim = " ")
## Parsed with column specification:
## cols(
##   day8 = col_double(),
##   day15 = col_double(),
##   day22 = col_double(),
##   day29 = col_double(),
##   day36 = col_double()
## )
df
## # A tibble: 30 x 5
##     day8 day15 day22 day29 day36
##    <dbl> <dbl> <dbl> <dbl> <dbl>
##  1   151   199   246   283   320
##  2   145   199   249   293   354
##  3   147   214   263   312   328
##  4   155   200   237   272   297
##  5   135   188   230   280   323
##  6   159   210   252   298   331
##  7   141   189   231   275   305
##  8   159   201   248   297   338
##  9   177   236   285   350   376
## 10   134   182   220   260   296
## # … with 20 more rows
# Convert to a numeric matrix for Stan's real y[N,T] data declaration.
# NOTE: this overwrites the `y` vector from the sea-ice example above.
y <- as.matrix(df)
y
##       day8 day15 day22 day29 day36
##  [1,]  151   199   246   283   320
##  [2,]  145   199   249   293   354
##  [3,]  147   214   263   312   328
##  [4,]  155   200   237   272   297
##  [5,]  135   188   230   280   323
##  [6,]  159   210   252   298   331
##  [7,]  141   189   231   275   305
##  [8,]  159   201   248   297   338
##  [9,]  177   236   285   350   376
## [10,]  134   182   220   260   296
## [11,]  160   208   261   313   352
## [12,]  143   188   220   273   314
## [13,]  154   200   244   289   325
## [14,]  171   221   270   326   358
## [15,]  163   216   242   281   312
## [16,]  160   207   248   288   324
## [17,]  142   187   234   280   316
## [18,]  156   203   243   283   317
## [19,]  157   212   259   307   336
## [20,]  152   203   246   286   321
## [21,]  154   205   253   298   334
## [22,]  139   190   225   267   302
## [23,]  146   191   229   272   302
## [24,]  157   211   250   285   323
## [25,]  132   185   237   286   331
## [26,]  160   207   257   303   345
## [27,]  169   216   261   295   333
## [28,]  157   205   248   289   316
## [29,]  137   180   219   258   291
## [30,]  153   200   244   286   324
x <- c(8,15,22,29,36)  # measurement days (columns of y)
xbar <- mean(x)        # centering constant used inside the Stan model
N <- nrow(y)           # number of rats
T <- ncol(y)           # number of time points (note: shadows the T/TRUE shorthand)

#options(mc.cores = parallel::detectCores())
#sample from that posterior distribution 
# Name the data list explicitly: Stan matches list entries to the data block
# by name. The original unnamed list(y,x,xbar,N,T) only worked because rstan
# deparses bare symbols into names -- it would fail for any other expression.
rats_fit <- rstan::sampling(rats, 
                     data = list(y = y, x = x, xbar = xbar, N = N, T = T)) 
rstan::summary(rats_fit)
## $summary
##                       mean     se_mean          sd         2.5%
## alpha[1]       239.9423636 0.035460531  2.65861431  234.7015207
## alpha[2]       247.8115216 0.030460384  2.76890029  242.2790978
## alpha[3]       252.3839050 0.030565418  2.67086604  247.2134940
## alpha[4]       232.5616378 0.034935363  2.74889051  227.2728839
## alpha[5]       231.6280523 0.032108688  2.71148797  226.3501568
## alpha[6]       249.7467998 0.032877787  2.67092533  244.5119466
## alpha[7]       228.7301016 0.033954479  2.71566364  223.4256012
## alpha[8]       248.3745495 0.032293054  2.72708183  242.9824360
## alpha[9]       283.3184168 0.038237600  2.74919437  277.7747812
## alpha[10]      219.3058147 0.038588072  2.70624767  213.9719961
## alpha[11]      258.2128883 0.034691825  2.69043291  253.0147702
## alpha[12]      228.0948131 0.030772252  2.71005016  222.7503311
## alpha[13]      242.4267109 0.036220743  2.77187111  236.9996324
## alpha[14]      268.2864876 0.038500988  2.79160383  262.5676388
## alpha[15]      242.7794823 0.034391873  2.65111343  237.5600879
## alpha[16]      245.2343885 0.033597820  2.69465721  239.8749087
## alpha[17]      232.1875633 0.035421589  2.69711908  227.0003543
## alpha[18]      240.4572376 0.033560847  2.69045889  235.1512652
## alpha[19]      253.8629706 0.034699036  2.58908479  248.7854239
## alpha[20]      241.5859905 0.033154489  2.67239394  236.1653960
## alpha[21]      248.4987113 0.035937712  2.74609641  243.0970495
## alpha[22]      225.2371818 0.037140728  2.66906614  219.9947121
## alpha[23]      228.5580936 0.031687599  2.60368436  223.3925153
## alpha[24]      245.1335002 0.035075249  2.66584317  239.8671400
## alpha[25]      234.5158829 0.034702466  2.63730569  229.3578194
## alpha[26]      253.9658590 0.031793242  2.73627518  248.6212597
## alpha[27]      254.3168504 0.034360058  2.61977345  249.2867116
## alpha[28]      242.9833376 0.035038710  2.78768873  237.5120861
## alpha[29]      217.8842522 0.035690065  2.67259120  212.6607838
## alpha[30]      241.4622753 0.034044631  2.69759769  236.1616720
## beta[1]          6.0673544 0.003189642  0.23738328    5.5975804
## beta[2]          7.0470715 0.003872375  0.26222303    6.5299259
## beta[3]          6.4815410 0.003201047  0.24459524    6.0055090
## beta[4]          5.3435561 0.003646224  0.25856416    4.8357846
## beta[5]          6.5667093 0.003388175  0.24599493    6.0775243
## beta[6]          6.1748890 0.003089737  0.23782975    5.7129946
## beta[7]          5.9798747 0.003096754  0.24128755    5.5075274
## beta[8]          6.4172680 0.003110402  0.23727472    5.9625592
## beta[9]          7.0521993 0.003754736  0.26075569    6.5348339
## beta[10]         5.8480735 0.002998405  0.24161372    5.3814471
## beta[11]         6.7945641 0.003481356  0.25685861    6.2857155
## beta[12]         6.1226853 0.002986245  0.23954753    5.6490250
## beta[13]         6.1611288 0.003126127  0.24145814    5.6938569
## beta[14]         6.6895986 0.003700085  0.25159420    6.1937591
## beta[15]         5.4217237 0.003523983  0.25660777    4.9203261
## beta[16]         5.9257202 0.003074800  0.23979142    5.4459962
## beta[17]         6.2757767 0.003460226  0.24350305    5.8071308
## beta[18]         5.8463722 0.003112254  0.24133006    5.3770579
## beta[19]         6.4071010 0.003196043  0.24362494    5.9176442
## beta[20]         6.0536526 0.003429293  0.24200039    5.5791506
## beta[21]         6.4039059 0.003207626  0.24323390    5.9328430
## beta[22]         5.8541688 0.003359061  0.24281765    5.3891688
## beta[23]         5.7483359 0.002941236  0.24007328    5.2725238
## beta[24]         5.8882122 0.003180098  0.24233114    5.4124762
## beta[25]         6.9073171 0.003628716  0.25772900    6.4005913
## beta[26]         6.5414103 0.003356598  0.24181881    6.0648226
## beta[27]         5.9025639 0.003011125  0.24622937    5.4223795
## beta[28]         5.8450022 0.003323369  0.23855947    5.3902173
## beta[29]         5.6722914 0.002988541  0.24513917    5.1947596
## beta[30]         6.1342583 0.003108542  0.24199158    5.6612954
## mu_alpha       242.4648273 0.038818795  2.73820249  237.0960187
## mu_beta          6.1872145 0.001694249  0.10920281    5.9689385
## sigmasq_y       37.4192118 0.117932889  5.81371149   27.7264872
## sigmasq_alpha  217.6404292 0.984502601 63.66007081  126.1448131
## sigmasq_beta     0.2750722 0.001906184  0.10230921    0.1221348
## sigma_y          6.0991778 0.009491871  0.46829118    5.2655947
## sigma_alpha     14.6079901 0.030932661  2.06109627   11.2314208
## sigma_beta       0.5160493 0.001756162  0.09363456    0.3494779
## alpha0         106.3461082 0.053221778  3.63212195   99.2341019
## lp__          -438.5731922 0.210524407  6.94846182 -453.4916630
##                        25%          50%          75%        97.5%    n_eff
## alpha[1]       238.2439161  239.9606301  241.6640725  245.2355945 5621.086
## alpha[2]       245.9423120  247.7856041  249.6960132  253.2288694 8263.117
## alpha[3]       250.5600921  252.3925306  254.1767215  257.7140455 7635.606
## alpha[4]       230.7185388  232.5750502  234.3977610  238.0155463 6191.336
## alpha[5]       229.8287931  231.6521784  233.4806875  236.8554768 7131.325
## alpha[6]       247.9789404  249.7378589  251.5722437  254.9554258 6599.611
## alpha[7]       226.8763763  228.7517274  230.5903210  234.1073839 6396.727
## alpha[8]       246.5722076  248.3465609  250.2044895  253.7738104 7131.454
## alpha[9]       281.5022530  283.3248236  285.1420664  288.7976541 5169.275
## alpha[10]      217.4806855  219.2647915  221.1435904  224.5740921 4918.457
## alpha[11]      256.3956596  258.2435375  260.0273731  263.4395437 6014.368
## alpha[12]      226.2967869  228.0868685  229.9037639  233.4319425 7755.970
## alpha[13]      240.5923482  242.4629083  244.1837208  247.8907947 5856.409
## alpha[14]      266.4554596  268.2771922  270.1631866  273.7006444 5257.313
## alpha[15]      241.0108321  242.7902042  244.5028306  248.1422672 5942.169
## alpha[16]      243.4979222  245.2545028  246.9663804  250.6726238 6432.575
## alpha[17]      230.3547329  232.1876877  233.9590152  237.5017966 5797.812
## alpha[18]      238.6772272  240.4323683  242.2513309  245.8319623 6426.683
## alpha[19]      252.1480582  253.8614984  255.6219772  258.9337711 5567.468
## alpha[20]      239.8419887  241.5707446  243.3816135  246.8699859 6497.051
## alpha[21]      246.7562903  248.4581618  250.2712181  253.9902342 5838.895
## alpha[22]      223.5028868  225.1822740  227.0019788  230.5035491 5164.376
## alpha[23]      226.8292158  228.5054931  230.3231629  233.7092050 6751.465
## alpha[24]      243.3280162  245.2022046  246.9311406  250.3256922 5776.539
## alpha[25]      232.7538888  234.5195940  236.2382130  239.6417524 5775.642
## alpha[26]      252.1010864  253.9170295  255.8479158  259.3077855 7407.129
## alpha[27]      252.5790515  254.3357603  256.0468799  259.4710319 5813.259
## alpha[28]      241.1616376  242.9768786  244.8943459  248.4660048 6329.834
## alpha[29]      216.1099438  217.8885017  219.6219978  223.2313728 5607.514
## alpha[30]      239.6851875  241.4779965  243.2242165  246.8286887 6278.517
## beta[1]          5.9072665    6.0671840    6.2299582    6.5302016 5538.809
## beta[2]          6.8696511    7.0440767    7.2206856    7.5723704 4585.501
## beta[3]          6.3165035    6.4797134    6.6461727    6.9540902 5838.642
## beta[4]          5.1740300    5.3409118    5.5165690    5.8642401 5028.634
## beta[5]          6.3982182    6.5683787    6.7365131    7.0467165 5271.336
## beta[6]          6.0179339    6.1777041    6.3330721    6.6452563 5925.013
## beta[7]          5.8171082    5.9823869    6.1388474    6.4596504 6070.948
## beta[8]          6.2597219    6.4116730    6.5815006    6.8836697 5819.289
## beta[9]          6.8765419    7.0505844    7.2301852    7.5621915 4822.906
## beta[10]         5.6870270    5.8451578    6.0101527    6.3130055 6493.257
## beta[11]         6.6207338    6.7937623    6.9680101    7.3012702 5443.666
## beta[12]         5.9590104    6.1235402    6.2851452    6.5872088 6434.762
## beta[13]         5.9982602    6.1575251    6.3251251    6.6335993 5965.823
## beta[14]         6.5225038    6.6867863    6.8584613    7.1887118 4623.574
## beta[15]         5.2495575    5.4233379    5.5853903    5.9330845 5302.395
## beta[16]         5.7636695    5.9285765    6.0863414    6.3986640 6081.818
## beta[17]         6.1069377    6.2768303    6.4410318    6.7423943 4952.220
## beta[18]         5.6804845    5.8443994    6.0100062    6.3164912 6012.744
## beta[19]         6.2407714    6.4092864    6.5697028    6.8908925 5810.565
## beta[20]         5.8876952    6.0580890    6.2168526    6.5270978 4979.928
## beta[21]         6.2391621    6.3994012    6.5692892    6.8823353 5750.172
## beta[22]         5.6909059    5.8506241    6.0178560    6.3321926 5225.461
## beta[23]         5.5864242    5.7523503    5.9089197    6.2190269 6662.356
## beta[24]         5.7253799    5.8907869    6.0506633    6.3689152 5806.806
## beta[25]         6.7381525    6.9043686    7.0853425    7.3963535 5044.530
## beta[26]         6.3770798    6.5377777    6.7078135    7.0079045 5190.170
## beta[27]         5.7310350    5.9075051    6.0649487    6.3788626 6686.858
## beta[28]         5.6824358    5.8450499    6.0023782    6.3270140 5152.716
## beta[29]         5.5054952    5.6720448    5.8366665    6.1489748 6728.323
## beta[30]         5.9734451    6.1382932    6.2945717    6.6064410 6060.202
## mu_alpha       240.6290679  242.5348756  244.2426698  247.8022917 4975.618
## mu_beta          6.1149942    6.1874539    6.2574008    6.4038315 4154.448
## sigmasq_y       33.3623104   36.7789725   40.7810932   50.2842387 2430.172
## sigmasq_alpha  173.2806442  206.8259528  250.3078772  366.3284198 4181.196
## sigmasq_beta     0.2047543    0.2575639    0.3284388    0.5243521 2880.713
## sigma_y          5.7760116    6.0645670    6.3860076    7.0911380 2434.043
## sigma_alpha     13.1636106   14.3814447   15.8211211   19.1397077 4439.786
## sigma_beta       0.4524979    0.5075075    0.5730958    0.7241216 2842.780
## alpha0         103.9423545  106.3853531  108.8380283  113.3568320 4657.384
## lp__          -443.0411301 -438.2361095 -433.7310538 -426.1112303 1089.363
##                    Rhat
## alpha[1]      1.0002825
## alpha[2]      0.9991735
## alpha[3]      0.9998750
## alpha[4]      0.9992915
## alpha[5]      0.9992410
## alpha[6]      0.9992219
## alpha[7]      0.9994507
## alpha[8]      0.9993097
## alpha[9]      0.9996028
## alpha[10]     0.9993046
## alpha[11]     0.9997944
## alpha[12]     0.9998578
## alpha[13]     0.9993477
## alpha[14]     1.0004266
## alpha[15]     0.9998760
## alpha[16]     0.9996543
## alpha[17]     0.9997484
## alpha[18]     0.9998783
## alpha[19]     0.9992567
## alpha[20]     0.9995694
## alpha[21]     0.9994274
## alpha[22]     0.9993285
## alpha[23]     0.9996323
## alpha[24]     0.9996212
## alpha[25]     0.9998860
## alpha[26]     0.9997676
## alpha[27]     0.9996334
## alpha[28]     0.9994159
## alpha[29]     0.9998846
## alpha[30]     0.9994438
## beta[1]       0.9999028
## beta[2]       0.9995627
## beta[3]       0.9998139
## beta[4]       1.0000705
## beta[5]       0.9996401
## beta[6]       0.9998318
## beta[7]       0.9996440
## beta[8]       0.9995219
## beta[9]       1.0001891
## beta[10]      0.9994919
## beta[11]      0.9995338
## beta[12]      0.9996104
## beta[13]      0.9995256
## beta[14]      1.0000009
## beta[15]      0.9994658
## beta[16]      0.9997487
## beta[17]      0.9996308
## beta[18]      0.9997406
## beta[19]      0.9995902
## beta[20]      0.9996074
## beta[21]      0.9994073
## beta[22]      0.9991984
## beta[23]      0.9996317
## beta[24]      0.9990997
## beta[25]      0.9994139
## beta[26]      1.0001844
## beta[27]      0.9994710
## beta[28]      0.9995496
## beta[29]      1.0009717
## beta[30]      0.9993105
## mu_alpha      0.9998521
## mu_beta       0.9997042
## sigmasq_y     0.9996358
## sigmasq_alpha 1.0004121
## sigmasq_beta  0.9998460
## sigma_y       0.9996699
## sigma_alpha   1.0002866
## sigma_beta    0.9999001
## alpha0        1.0000332
## lp__          1.0015828
## 
## $c_summary
## , , chains = chain:1
## 
##                stats
## parameter               mean          sd         2.5%          25%
##   alpha[1]       240.0390705  2.77747547  234.5515535  238.1966606
##   alpha[2]       247.8289717  2.78041896  242.2246243  246.0376663
##   alpha[3]       252.2816265  2.79273773  246.8732790  250.4882474
##   alpha[4]       232.5054092  2.64330801  227.3823982  230.7489448
##   alpha[5]       231.6531885  2.82935263  226.2366066  229.7306793
##   alpha[6]       249.7241399  2.64122898  244.5824628  247.9798228
##   alpha[7]       228.7377520  2.73681461  223.4826062  226.7978244
##   alpha[8]       248.3293990  2.63577160  243.1214714  246.5067438
##   alpha[9]       283.2821191  2.73091018  278.1582898  281.4388428
##   alpha[10]      219.3799156  2.73900545  213.7341366  217.4359502
##   alpha[11]      258.1773760  2.62857285  252.8959329  256.4170162
##   alpha[12]      228.0795376  2.72170915  222.7622171  226.3290204
##   alpha[13]      242.3789902  2.90868602  236.6173132  240.4350772
##   alpha[14]      268.1452676  2.78344954  262.5061845  266.3291359
##   alpha[15]      242.8076834  2.57055708  237.8666252  241.1174159
##   alpha[16]      245.3178396  2.69086880  240.1835868  243.4923096
##   alpha[17]      232.2430098  2.72098908  226.9408129  230.3906280
##   alpha[18]      240.4842433  2.75179335  235.1161446  238.6541209
##   alpha[19]      253.8093165  2.69232048  248.3938904  251.9944848
##   alpha[20]      241.6415487  2.70149361  236.0479594  239.8932828
##   alpha[21]      248.4579714  2.82326933  243.1124893  246.6314771
##   alpha[22]      225.2446358  2.61744453  220.0212484  223.5039298
##   alpha[23]      228.5831468  2.59116018  223.3017259  226.9639223
##   alpha[24]      245.1203126  2.73180975  239.5674230  243.3034755
##   alpha[25]      234.4012620  2.63635980  229.1061383  232.6326279
##   alpha[26]      253.9371363  2.78317333  248.5071005  252.0310412
##   alpha[27]      254.2767324  2.75489342  248.9828334  252.5015264
##   alpha[28]      242.9797324  2.77115989  237.4373023  241.2417234
##   alpha[29]      217.9830216  2.64069529  212.7476814  216.3935019
##   alpha[30]      241.4392592  2.74699781  236.2178156  239.6178525
##   beta[1]          6.0646208  0.24618146    5.5636564    5.8999463
##   beta[2]          7.0485311  0.25250652    6.5625904    6.8742352
##   beta[3]          6.4831391  0.24661829    5.9990555    6.3072900
##   beta[4]          5.3390433  0.26303769    4.8326282    5.1612317
##   beta[5]          6.5747331  0.24727359    6.0853634    6.4034473
##   beta[6]          6.1671038  0.24873441    5.6964057    6.0034742
##   beta[7]          5.9767075  0.22929568    5.5454244    5.8207104
##   beta[8]          6.4210728  0.23797498    5.9766929    6.2596404
##   beta[9]          7.0507340  0.25857969    6.5271913    6.8842888
##   beta[10]         5.8542943  0.25328735    5.3661190    5.6800479
##   beta[11]         6.7939692  0.26048768    6.2825019    6.6206139
##   beta[12]         6.1273838  0.22511578    5.7030504    5.9645722
##   beta[13]         6.1622297  0.23778436    5.6926036    6.0105858
##   beta[14]         6.6869133  0.26299801    6.1858833    6.5085781
##   beta[15]         5.4253436  0.24956248    4.9392069    5.2650536
##   beta[16]         5.9324380  0.23825077    5.4505196    5.7766700
##   beta[17]         6.2684558  0.24970239    5.8071053    6.0847965
##   beta[18]         5.8477836  0.24207924    5.3611826    5.6792482
##   beta[19]         6.3988595  0.24778805    5.8968707    6.2270025
##   beta[20]         6.0488247  0.23734002    5.5923237    5.8799583
##   beta[21]         6.4057727  0.25004382    5.9427652    6.2394826
##   beta[22]         5.8546661  0.23700626    5.3820963    5.6989738
##   beta[23]         5.7585814  0.23618471    5.2737493    5.6040658
##   beta[24]         5.8870818  0.24823457    5.4086104    5.7153864
##   beta[25]         6.9106135  0.25244228    6.3877220    6.7569373
##   beta[26]         6.5279446  0.23987782    6.0743866    6.3592573
##   beta[27]         5.9089571  0.23917322    5.4534926    5.7431662
##   beta[28]         5.8409816  0.23456311    5.3963805    5.6821667
##   beta[29]         5.6848321  0.25414016    5.2031353    5.5143125
##   beta[30]         6.1381910  0.24636684    5.6489866    5.9763221
##   mu_alpha       242.3911593  2.64594471  237.0412124  240.6948894
##   mu_beta          6.1879492  0.11156956    5.9662191    6.1142052
##   sigmasq_y       37.6006074  5.45631141   28.7282188   33.7010318
##   sigmasq_alpha  214.9547221 63.43141211  126.7760894  170.5966981
##   sigmasq_beta     0.2736480  0.09842499    0.1303939    0.2052591
##   sigma_y          6.1161619  0.43973279    5.3598711    5.8052590
##   sigma_alpha     14.5165909  2.05609817   11.2594888   13.0612614
##   sigma_beta       0.5151938  0.09072805    0.3611010    0.4530553
##   alpha0         106.2562770  3.65840811   98.7128474  103.8298535
##   lp__          -438.9142359  6.58625968 -452.8015137 -443.1529108
##                stats
## parameter                50%          75%        97.5%
##   alpha[1]       240.0049968  241.9079435  245.3531780
##   alpha[2]       247.8665302  249.6952359  253.2032381
##   alpha[3]       252.2028243  254.0586645  257.8342137
##   alpha[4]       232.5470022  234.2887529  237.5481649
##   alpha[5]       231.7120322  233.5228220  237.3914080
##   alpha[6]       249.7052877  251.3595453  254.9054679
##   alpha[7]       228.7781472  230.5891129  234.0219862
##   alpha[8]       248.2792183  250.2492555  253.6750972
##   alpha[9]       283.2679072  284.9927326  288.8096197
##   alpha[10]      219.3740363  221.2257813  224.7096148
##   alpha[11]      258.2271618  260.0020178  263.1044600
##   alpha[12]      228.0051686  229.9156527  233.3263631
##   alpha[13]      242.4199743  244.2479100  248.3300187
##   alpha[14]      268.1648882  270.1747310  273.2445425
##   alpha[15]      242.8204255  244.4482337  248.1623586
##   alpha[16]      245.3414940  247.1332353  250.7659444
##   alpha[17]      232.3634931  234.0295692  237.4081239
##   alpha[18]      240.3667093  242.3271936  246.0370301
##   alpha[19]      253.7551980  255.5863431  259.2391204
##   alpha[20]      241.7542628  243.4672220  246.7447566
##   alpha[21]      248.3414513  250.2901422  253.9824580
##   alpha[22]      225.2045754  226.9306012  230.5185681
##   alpha[23]      228.5861075  230.2923455  233.7449064
##   alpha[24]      245.1751160  246.9921621  250.4034552
##   alpha[25]      234.4580719  236.1246602  239.5059219
##   alpha[26]      253.8678997  255.8248974  259.2594490
##   alpha[27]      254.2273159  255.9903232  260.0762053
##   alpha[28]      242.9546807  244.8129327  248.4931572
##   alpha[29]      217.9554798  219.6773626  223.2159810
##   alpha[30]      241.3606604  243.2734997  246.9005053
##   beta[1]          6.0657714    6.2267533    6.5411478
##   beta[2]          7.0420290    7.2136720    7.5707532
##   beta[3]          6.4935728    6.6585983    6.9558966
##   beta[4]          5.3498373    5.5188910    5.8495994
##   beta[5]          6.5805027    6.7541293    7.0270143
##   beta[6]          6.1692491    6.3387680    6.6420069
##   beta[7]          5.9752484    6.1236189    6.4348417
##   beta[8]          6.4156885    6.5858762    6.8789898
##   beta[9]          7.0545788    7.2179969    7.5875566
##   beta[10]         5.8578443    6.0164959    6.3719387
##   beta[11]         6.7927858    6.9750531    7.2952982
##   beta[12]         6.1316878    6.2786938    6.5622480
##   beta[13]         6.1574582    6.3258792    6.6215082
##   beta[14]         6.6784938    6.8683622    7.2072059
##   beta[15]         5.4239429    5.5858418    5.9208486
##   beta[16]         5.9400604    6.0936517    6.3919837
##   beta[17]         6.2756603    6.4412693    6.7423943
##   beta[18]         5.8526137    6.0203781    6.3029977
##   beta[19]         6.3985646    6.5574143    6.9040604
##   beta[20]         6.0533091    6.2044910    6.5263821
##   beta[21]         6.4029189    6.5692686    6.9044388
##   beta[22]         5.8505403    6.0147384    6.3336006
##   beta[23]         5.7581105    5.9099115    6.2191784
##   beta[24]         5.8952640    6.0554392    6.3689735
##   beta[25]         6.9070207    7.0848504    7.3819472
##   beta[26]         6.5204627    6.6941387    6.9881646
##   beta[27]         5.9061042    6.0618565    6.3702267
##   beta[28]         5.8489005    5.9968392    6.2816547
##   beta[29]         5.6840094    5.8694872    6.1763974
##   beta[30]         6.1552197    6.2983552    6.6191776
##   mu_alpha       242.4303837  244.0074894  247.8397899
##   mu_beta          6.1876292    6.2603978    6.3978222
##   sigmasq_y       37.1302445   40.6758740   49.6105492
##   sigmasq_alpha  202.5336159  248.9196018  364.0120276
##   sigmasq_beta     0.2570650    0.3242251    0.5027588
##   sigma_y          6.0934591    6.3777632    7.0434755
##   sigma_alpha     14.2314249   15.7771861   19.0790991
##   sigma_beta       0.5070158    0.5694077    0.7090549
##   alpha0         106.3608188  108.5588998  113.5863547
##   lp__          -438.7379080 -434.3716299 -426.7328126
## 
## , , chains = chain:2
## 
##                stats
## parameter               mean          sd         2.5%          25%
##   alpha[1]       239.8992432  2.69892734  234.7378815  238.2156662
##   alpha[2]       247.8210949  2.76037269  242.5983947  245.8466954
##   alpha[3]       252.4153983  2.72203997  247.0410108  250.5160925
##   alpha[4]       232.5900632  2.87520146  226.7643314  230.7219818
##   alpha[5]       231.6685810  2.75496668  226.5124899  229.8042256
##   alpha[6]       249.6915105  2.76351273  244.2131590  247.8759144
##   alpha[7]       228.7764707  2.63071651  223.6698974  227.0297421
##   alpha[8]       248.4133254  2.86090140  242.8225253  246.5693100
##   alpha[9]       283.2086876  2.64969830  277.8660948  281.5393406
##   alpha[10]      219.3068421  2.65144686  214.3261729  217.4901025
##   alpha[11]      258.1816620  2.55967206  253.2679794  256.5317354
##   alpha[12]      228.1801660  2.76647793  222.7535320  226.3542072
##   alpha[13]      242.4900749  2.79388153  237.0258422  240.5311717
##   alpha[14]      268.1875316  2.86718926  262.6079748  266.2497489
##   alpha[15]      242.8254186  2.56796615  237.6446300  241.2140847
##   alpha[16]      245.2262017  2.69318779  239.7281691  243.6332265
##   alpha[17]      232.0954936  2.66843956  227.0574138  230.3652638
##   alpha[18]      240.5043702  2.85951816  234.9818797  238.5739129
##   alpha[19]      253.8546517  2.44932188  248.9342403  252.2394824
##   alpha[20]      241.5517547  2.66213414  236.1673239  239.8027740
##   alpha[21]      248.4570174  2.84693353  242.6238441  246.8087895
##   alpha[22]      225.2150453  2.65740854  219.8870643  223.4674025
##   alpha[23]      228.4962183  2.66783562  223.4077140  226.6865826
##   alpha[24]      245.0697454  2.68414622  239.9522043  243.2312163
##   alpha[25]      234.5865739  2.64117212  229.5684882  232.7613522
##   alpha[26]      254.0173158  2.77729956  248.7412908  252.0658798
##   alpha[27]      254.2842564  2.63714811  249.3321819  252.4688180
##   alpha[28]      243.0444519  2.78423663  237.5302422  241.1701476
##   alpha[29]      217.8896669  2.75963529  212.5246181  216.0145355
##   alpha[30]      241.5157357  2.74546756  236.1523641  239.6961222
##   beta[1]          6.0573158  0.23016891    5.6199117    5.9037498
##   beta[2]          7.0474316  0.26413066    6.5237735    6.8701686
##   beta[3]          6.4881542  0.25480484    5.9857399    6.3284416
##   beta[4]          5.3496765  0.25192153    4.8485530    5.1837660
##   beta[5]          6.5661041  0.23955718    6.1087806    6.3977878
##   beta[6]          6.1742621  0.22784280    5.7036824    6.0301019
##   beta[7]          5.9824420  0.25618728    5.4853484    5.8005278
##   beta[8]          6.4154407  0.23699153    5.9519064    6.2647156
##   beta[9]          7.0641119  0.26179770    6.5496155    6.8884156
##   beta[10]         5.8444895  0.24073399    5.3843883    5.6724725
##   beta[11]         6.8019627  0.26217194    6.2804553    6.6322960
##   beta[12]         6.1259455  0.23934970    5.6214056    5.9726347
##   beta[13]         6.1648674  0.25401839    5.6822744    5.9899174
##   beta[14]         6.6890244  0.23705182    6.2086611    6.5410728
##   beta[15]         5.4120025  0.26653658    4.8808262    5.2460806
##   beta[16]         5.9178731  0.23915403    5.4338849    5.7523506
##   beta[17]         6.2811016  0.24599456    5.8139696    6.1102986
##   beta[18]         5.8438826  0.23313682    5.3933041    5.6858041
##   beta[19]         6.4105074  0.23885752    5.9191725    6.2478695
##   beta[20]         6.0581522  0.23881734    5.5920421    5.8974236
##   beta[21]         6.4113397  0.25588897    5.9185215    6.2309670
##   beta[22]         5.8590816  0.24253251    5.3744080    5.7035291
##   beta[23]         5.7455624  0.24181873    5.2947563    5.5775285
##   beta[24]         5.8883991  0.24373704    5.4075930    5.7287989
##   beta[25]         6.9163324  0.26050759    6.4233890    6.7388996
##   beta[26]         6.5509887  0.23779361    6.0810311    6.3866232
##   beta[27]         5.8997607  0.25135103    5.3976942    5.7287077
##   beta[28]         5.8465375  0.23823476    5.3937146    5.6828447
##   beta[29]         5.6655534  0.23652092    5.2174586    5.5105208
##   beta[30]         6.1389688  0.23696618    5.6882678    5.9707680
##   mu_alpha       242.5150160  2.89266711  236.9602001  240.5365451
##   mu_beta          6.1867656  0.11233283    5.9780870    6.1066705
##   sigmasq_y       37.4900586  6.10058209   27.8597686   33.2071987
##   sigmasq_alpha  220.8643839 70.03150125  123.9375976  175.0496900
##   sigmasq_beta     0.2780704  0.10629732    0.1220801    0.2041059
##   sigma_y          6.1034804  0.48767201    5.2782342    5.7625687
##   sigma_alpha     14.6956033  2.21552174   11.1327234   13.2306345
##   sigma_beta       0.5183960  0.09667165    0.3493995    0.4517809
##   alpha0         106.4061735  3.72110753   99.3697314  103.7939684
##   lp__          -438.9511410  7.19106927 -453.9419016 -443.3296283
##                stats
## parameter                50%          75%        97.5%
##   alpha[1]       239.8751084  241.5815583  245.6331412
##   alpha[2]       247.7407502  249.6688850  253.2440827
##   alpha[3]       252.4896304  254.3075815  257.7143124
##   alpha[4]       232.5680599  234.3994593  238.3154825
##   alpha[5]       231.7105989  233.5345041  236.7775591
##   alpha[6]       249.7047247  251.5124357  255.1592652
##   alpha[7]       228.8220496  230.5975565  234.0860614
##   alpha[8]       248.3680702  250.1997024  253.9529363
##   alpha[9]       283.1602984  284.9324887  288.5227017
##   alpha[10]      219.2362163  221.0213783  224.5854767
##   alpha[11]      258.1788881  259.8770721  263.2892243
##   alpha[12]      228.1511049  230.0212201  233.6857879
##   alpha[13]      242.6347989  244.3889866  247.8665371
##   alpha[14]      268.1485619  270.1318605  274.1549962
##   alpha[15]      242.8010299  244.4012901  247.9705173
##   alpha[16]      245.1770607  246.9488390  250.6478050
##   alpha[17]      232.0938687  233.7849763  237.3390792
##   alpha[18]      240.6071843  242.4813793  246.1617560
##   alpha[19]      253.9285863  255.5641089  258.5040929
##   alpha[20]      241.4946290  243.3603308  246.8416636
##   alpha[21]      248.4005955  250.2727172  254.2154008
##   alpha[22]      225.1548887  227.0353597  230.3951741
##   alpha[23]      228.3358991  230.3196851  233.8359853
##   alpha[24]      245.2039688  246.9308466  250.3260465
##   alpha[25]      234.5689220  236.3177374  239.6626567
##   alpha[26]      254.0548001  255.9985773  259.3843700
##   alpha[27]      254.3596260  255.9442190  259.6527384
##   alpha[28]      243.1186334  244.9997467  248.3295315
##   alpha[29]      217.8205926  219.7858166  223.3504631
##   alpha[30]      241.5576367  243.3048428  246.8847083
##   beta[1]          6.0586748    6.2232951    6.4998601
##   beta[2]          7.0445780    7.2244737    7.5750860
##   beta[3]          6.4814332    6.6473832    6.9595302
##   beta[4]          5.3385787    5.5193916    5.8503525
##   beta[5]          6.5703850    6.7290925    7.0523794
##   beta[6]          6.1796967    6.3201766    6.6281908
##   beta[7]          5.9876902    6.1489602    6.5011017
##   beta[8]          6.4125712    6.5755306    6.8802645
##   beta[9]          7.0693474    7.2436513    7.5623427
##   beta[10]         5.8442424    6.0167802    6.2906208
##   beta[11]         6.8046566    6.9746540    7.3237331
##   beta[12]         6.1176570    6.2927932    6.5766765
##   beta[13]         6.1636767    6.3352096    6.6583638
##   beta[14]         6.6839842    6.8483636    7.1473969
##   beta[15]         5.4084560    5.5807277    5.9474377
##   beta[16]         5.9236249    6.0715248    6.4037915
##   beta[17]         6.2757607    6.4494683    6.7574281
##   beta[18]         5.8358162    5.9951435    6.3165965
##   beta[19]         6.4143518    6.5753877    6.8645662
##   beta[20]         6.0621828    6.2139394    6.5331129
##   beta[21]         6.4115937    6.5899955    6.9251032
##   beta[22]         5.8571045    6.0183654    6.3217067
##   beta[23]         5.7504284    5.9042901    6.2060900
##   beta[24]         5.8900876    6.0476794    6.3945996
##   beta[25]         6.9073395    7.1041589    7.3899983
##   beta[26]         6.5455027    6.7072059    7.0135513
##   beta[27]         5.9032834    6.0685271    6.3637634
##   beta[28]         5.8478040    6.0037379    6.3343518
##   beta[29]         5.6575829    5.8181441    6.1292523
##   beta[30]         6.1420114    6.3070750    6.5929311
##   mu_alpha       242.6274339  244.5057405  247.8144113
##   mu_beta          6.1914206    6.2582884    6.4183551
##   sigmasq_y       36.6798580   40.8356130   51.1713920
##   sigmasq_alpha  207.9591212  254.2352533  390.2470731
##   sigmasq_beta     0.2622215    0.3367826    0.5322772
##   sigma_y          6.0563897    6.3902748    7.1534166
##   sigma_alpha     14.4207875   15.9447559   19.7546649
##   sigma_beta       0.5120756    0.5803298    0.7295732
##   alpha0         106.3694405  109.0048390  113.5326277
##   lp__          -438.6071415 -433.9940791 -426.4249314
## 
## , , chains = chain:3
## 
##                stats
## parameter               mean          sd         2.5%          25%
##   alpha[1]       239.9012585  2.46276920  234.8977701  238.2954394
##   alpha[2]       247.8271512  2.76157914  242.2780347  245.9529171
##   alpha[3]       252.4514987  2.52498637  247.6723122  250.8606487
##   alpha[4]       232.5938050  2.78178072  227.2526214  230.7331855
##   alpha[5]       231.5502935  2.64423030  226.3977583  229.8849894
##   alpha[6]       249.7819591  2.61869963  244.6245059  248.0492070
##   alpha[7]       228.6441888  2.69051329  223.4134219  226.7938713
##   alpha[8]       248.3330291  2.79223412  242.8156926  246.5354706
##   alpha[9]       283.4045114  2.87099567  277.5194875  281.4628117
##   alpha[10]      219.3079432  2.79635043  213.7400761  217.5765031
##   alpha[11]      258.2718656  2.86738035  252.8610744  256.2842155
##   alpha[12]      228.0421646  2.67634673  223.0112493  226.2707460
##   alpha[13]      242.3590171  2.69201258  237.0826346  240.7047002
##   alpha[14]      268.4722186  2.70681122  262.9502729  266.7348729
##   alpha[15]      242.6340198  2.80421996  237.2118478  240.6325512
##   alpha[16]      245.1184586  2.78162820  239.5967134  243.3172919
##   alpha[17]      232.1713488  2.64976314  226.9519764  230.2935995
##   alpha[18]      240.4962213  2.53317267  235.5912193  238.8105179
##   alpha[19]      253.8676070  2.50869441  248.7847855  252.2516178
##   alpha[20]      241.5818984  2.58139861  236.2736987  239.8718622
##   alpha[21]      248.5044986  2.63148964  243.0895460  246.7603232
##   alpha[22]      225.2844821  2.67385752  220.1832829  223.5810043
##   alpha[23]      228.4847273  2.57401738  223.1527649  226.7849513
##   alpha[24]      245.1341065  2.73657185  239.7506941  243.1932587
##   alpha[25]      234.4999847  2.65189508  229.2875716  232.8098707
##   alpha[26]      253.9580077  2.70566343  248.4797335  252.2662124
##   alpha[27]      254.2889022  2.52722311  249.3688314  252.6271634
##   alpha[28]      242.9696691  2.72316150  237.8009048  241.2420580
##   alpha[29]      217.7983190  2.61250702  212.6907466  216.0196028
##   alpha[30]      241.4150903  2.68675271  235.9822095  239.7610576
##   beta[1]          6.0716072  0.23751311    5.6185607    5.9062878
##   beta[2]          7.0537480  0.26627125    6.5236412    6.8695866
##   beta[3]          6.4760387  0.23884865    6.0192754    6.3107339
##   beta[4]          5.3310049  0.26080481    4.8090289    5.1696015
##   beta[5]          6.5638607  0.25814281    6.0594963    6.3877291
##   beta[6]          6.1725495  0.24743534    5.7179184    6.0006167
##   beta[7]          5.9802333  0.24715022    5.4865931    5.8250234
##   beta[8]          6.4184519  0.22972555    5.9625592    6.2661872
##   beta[9]          7.0535079  0.26357586    6.5296019    6.8762601
##   beta[10]         5.8438440  0.24367083    5.3566846    5.6874706
##   beta[11]         6.7948927  0.25411242    6.2739900    6.6298921
##   beta[12]         6.1196159  0.25190235    5.6237162    5.9470048
##   beta[13]         6.1560028  0.24087312    5.6701937    5.9946140
##   beta[14]         6.6830252  0.25170320    6.1942972    6.5117577
##   beta[15]         5.4214637  0.24709370    4.9485506    5.2416192
##   beta[16]         5.9251011  0.24264253    5.4417823    5.7691494
##   beta[17]         6.2816875  0.23687709    5.8050487    6.1241916
##   beta[18]         5.8473367  0.25057380    5.3614432    5.6668036
##   beta[19]         6.4080340  0.24992718    5.9066984    6.2334291
##   beta[20]         6.0496155  0.25280418    5.5712480    5.8840474
##   beta[21]         6.3973405  0.23179322    5.9561988    6.2382309
##   beta[22]         5.8472864  0.25268662    5.3887746    5.6723787
##   beta[23]         5.7448631  0.23875592    5.2734916    5.5892304
##   beta[24]         5.8881532  0.24994275    5.4135944    5.7140267
##   beta[25]         6.9002497  0.26578611    6.3943621    6.7251512
##   beta[26]         6.5347442  0.24385953    6.0678186    6.3675230
##   beta[27]         5.8935240  0.24916915    5.4115913    5.7144381
##   beta[28]         5.8410699  0.23662416    5.3937438    5.6803174
##   beta[29]         5.6726006  0.25076367    5.1502955    5.5032841
##   beta[30]         6.1294820  0.23886252    5.6414069    5.9780103
##   mu_alpha       242.4769774  2.77901606  237.1882659  240.5357637
##   mu_beta          6.1844282  0.10357614    5.9687158    6.1184845
##   sigmasq_y       37.4554255  6.02093987   27.2293729   33.2620167
##   sigmasq_alpha  217.2180687 57.99871421  129.9188938  175.6455945
##   sigmasq_beta     0.2780904  0.10261547    0.1191206    0.2070455
##   sigma_y          6.1009228  0.48414987    5.2181771    5.7673231
##   sigma_alpha     14.6135550  1.91461365   11.3981961   13.2531353
##   sigma_beta       0.5189007  0.09402814    0.3451381    0.4550225
##   alpha0         106.4195578  3.62362286   99.4345800  104.0427557
##   lp__          -438.4851171  7.00047077 -454.2334011 -442.8440556
##                stats
## parameter                50%          75%        97.5%
##   alpha[1]       240.0609963  241.4867412  244.7477041
##   alpha[2]       247.7663670  249.7272113  253.2514405
##   alpha[3]       252.5122500  254.1000193  257.3196325
##   alpha[4]       232.6805733  234.3897678  238.2880666
##   alpha[5]       231.5301521  233.3682783  236.5279128
##   alpha[6]       249.7912912  251.6621788  255.0673823
##   alpha[7]       228.5864617  230.5802541  233.8674490
##   alpha[8]       248.3299083  250.1839940  253.6761576
##   alpha[9]       283.5673626  285.4555673  289.0333840
##   alpha[10]      219.2961057  221.1814212  224.9560712
##   alpha[11]      258.2811198  260.2457836  263.8020091
##   alpha[12]      228.0578797  229.8579960  233.1395302
##   alpha[13]      242.3666741  244.0281322  247.8062175
##   alpha[14]      268.3875432  270.2372151  273.5540521
##   alpha[15]      242.6744814  244.5463725  248.0583289
##   alpha[16]      245.2159132  246.7927543  250.6122250
##   alpha[17]      232.1819319  233.9522429  237.3142323
##   alpha[18]      240.4299666  242.2014411  245.3577169
##   alpha[19]      253.8807920  255.6210700  258.7033601
##   alpha[20]      241.5992314  243.2396494  246.6555282
##   alpha[21]      248.5181406  250.1440409  254.1521934
##   alpha[22]      225.1964475  227.0325701  230.4986136
##   alpha[23]      228.4870911  230.2593152  233.4858114
##   alpha[24]      245.2343506  246.9604526  250.5751527
##   alpha[25]      234.5154642  236.1786261  239.6817791
##   alpha[26]      253.8933201  255.6938377  259.2659931
##   alpha[27]      254.3199312  256.0016069  259.2114961
##   alpha[28]      242.9143375  244.7586532  248.5080831
##   alpha[29]      217.8748405  219.4413369  222.9991742
##   alpha[30]      241.4932451  243.1465433  246.5652482
##   beta[1]          6.0724722    6.2369830    6.5430481
##   beta[2]          7.0608729    7.2358782    7.5987225
##   beta[3]          6.4783457    6.6430625    6.9244768
##   beta[4]          5.3236781    5.5020474    5.8530635
##   beta[5]          6.5623696    6.7403060    7.0583148
##   beta[6]          6.1762289    6.3366821    6.6778041
##   beta[7]          5.9812683    6.1427354    6.4719431
##   beta[8]          6.4150870    6.5847713    6.8513143
##   beta[9]          7.0482500    7.2331815    7.5472417
##   beta[10]         5.8391442    6.0060356    6.3108323
##   beta[11]         6.8000652    6.9603037    7.2939547
##   beta[12]         6.1234669    6.2954634    6.6272752
##   beta[13]         6.1508816    6.3137544    6.6187737
##   beta[14]         6.6792715    6.8585382    7.1924870
##   beta[15]         5.4195446    5.5826990    5.9046329
##   beta[16]         5.9184411    6.0847622    6.4169127
##   beta[17]         6.2807079    6.4341176    6.7406805
##   beta[18]         5.8433013    6.0320343    6.3177309
##   beta[19]         6.4134506    6.5786872    6.8913997
##   beta[20]         6.0500586    6.2202338    6.5486809
##   beta[21]         6.3895459    6.5638807    6.8394423
##   beta[22]         5.8426082    6.0223414    6.3331695
##   beta[23]         5.7512483    5.9006238    6.2211335
##   beta[24]         5.8941731    6.0548914    6.3715713
##   beta[25]         6.8989088    7.0884750    7.4098966
##   beta[26]         6.5347779    6.7050698    6.9863712
##   beta[27]         5.9010271    6.0576489    6.4378063
##   beta[28]         5.8424839    5.9993762    6.2919355
##   beta[29]         5.6825109    5.8316728    6.1634784
##   beta[30]         6.1224999    6.2833441    6.6055375
##   mu_alpha       242.5246909  244.3571940  247.9183051
##   mu_beta          6.1824509    6.2521207    6.3865723
##   sigmasq_y       36.5362797   41.1959555   50.9968741
##   sigmasq_alpha  209.8026859  248.5135515  357.4911719
##   sigmasq_beta     0.2612416    0.3342598    0.5244008
##   sigma_y          6.0445246    6.4184075    7.1412096
##   sigma_alpha     14.4845671   15.7643126   18.9074369
##   sigma_beta       0.5111180    0.5781520    0.7241552
##   alpha0         106.4915608  109.0069442  113.1628254
##   lp__          -438.0985652 -433.6225985 -426.1409040
## 
## , , chains = chain:4
## 
##                stats
## parameter               mean          sd         2.5%          25%
##   alpha[1]       239.9298822  2.68649682  234.6720076  238.2331329
##   alpha[2]       247.7688688  2.77688497  242.2647112  245.8763670
##   alpha[3]       252.3870963  2.63723986  247.2395421  250.5085574
##   alpha[4]       232.5572740  2.69278571  227.5484786  230.6835661
##   alpha[5]       231.6401461  2.61442907  226.6341180  229.9414900
##   alpha[6]       249.7895897  2.66072667  244.5119244  248.0407168
##   alpha[7]       228.7619948  2.80377603  223.1712046  226.8192254
##   alpha[8]       248.4224446  2.61420099  243.2564597  246.6352438
##   alpha[9]       283.3783490  2.74029223  277.7514733  281.6187105
##   alpha[10]      219.2285582  2.63699174  214.1244457  217.4548498
##   alpha[11]      258.2206493  2.69934939  253.0229528  256.3222344
##   alpha[12]      228.0773842  2.67675276  222.5833035  226.2573137
##   alpha[13]      242.4787612  2.68875308  237.3425781  240.6504918
##   alpha[14]      268.3409328  2.79877738  262.3578944  266.4810836
##   alpha[15]      242.8508072  2.65323791  237.6294515  241.0880781
##   alpha[16]      245.2750543  2.61014898  240.0179813  243.6192639
##   alpha[17]      232.2404009  2.74944249  227.1137386  230.3066778
##   alpha[18]      240.3441157  2.60625302  235.1291523  238.6728206
##   alpha[19]      253.9203072  2.69925773  248.8481454  252.0131167
##   alpha[20]      241.5687600  2.74499222  236.3403942  239.7331818
##   alpha[21]      248.5753578  2.67897262  243.5008838  246.8577574
##   alpha[22]      225.2045642  2.72962427  219.8931444  223.4053857
##   alpha[23]      228.6682820  2.58031412  223.9688477  226.9328264
##   alpha[24]      245.2098362  2.50637060  240.2995447  243.5747939
##   alpha[25]      234.5757111  2.61948322  229.6234026  232.7817125
##   alpha[26]      253.9509762  2.68094585  248.7717571  252.0830556
##   alpha[27]      254.4175107  2.55519437  249.6496907  252.6903073
##   alpha[28]      242.9394969  2.87322096  237.2753189  240.9233629
##   alpha[29]      217.8660015  2.67597708  212.9181628  216.0347046
##   alpha[30]      241.4790160  2.61186840  236.3460045  239.7034313
##   beta[1]          6.0758738  0.23532506    5.6053797    5.9150415
##   beta[2]          7.0385754  0.26590619    6.5262770    6.8585566
##   beta[3]          6.4788321  0.23792486    6.0152027    6.3187163
##   beta[4]          5.3544998  0.25809800    4.8611468    5.1760813
##   beta[5]          6.5621393  0.23868876    6.0741595    6.4089338
##   beta[6]          6.1856406  0.22634965    5.7541845    6.0310229
##   beta[7]          5.9801161  0.23182981    5.5480825    5.8212154
##   beta[8]          6.4141067  0.24447103    5.9768343    6.2471277
##   beta[9]          7.0404435  0.25888286    6.5673147    6.8619724
##   beta[10]         5.8496662  0.22831821    5.4139113    5.7108193
##   beta[11]         6.7874317  0.25067162    6.3198818    6.6057456
##   beta[12]         6.1177958  0.24128429    5.6657224    5.9519663
##   beta[13]         6.1614151  0.23292709    5.7297373    5.9983049
##   beta[14]         6.6994314  0.25401782    6.1964725    6.5290011
##   beta[15]         5.4280851  0.26279491    4.9186325    5.2530644
##   beta[16]         5.9274686  0.23922508    5.4748831    5.7604320
##   beta[17]         6.2718619  0.24134061    5.8024034    6.1079786
##   beta[18]         5.8464856  0.23954995    5.3810731    5.6811724
##   beta[19]         6.4110032  0.23786451    5.9633166    6.2528247
##   beta[20]         6.0580181  0.23891351    5.5657414    5.8903172
##   beta[21]         6.4011706  0.23449939    5.9254047    6.2478625
##   beta[22]         5.8556412  0.23895648    5.4089366    5.6885998
##   beta[23]         5.7443365  0.24353446    5.2432218    5.5779370
##   beta[24]         5.8892147  0.22709474    5.4276501    5.7371800
##   beta[25]         6.9020727  0.25197835    6.4027864    6.7391135
##   beta[26]         6.5519637  0.24514690    6.0461507    6.3893203
##   beta[27]         5.9080139  0.24509374    5.4181882    5.7405265
##   beta[28]         5.8514196  0.24489077    5.3814506    5.6858226
##   beta[29]         5.6661795  0.23853964    5.2121145    5.4987162
##   beta[30]         6.1303914  0.24583566    5.6567610    5.9692192
##   mu_alpha       242.4761564  2.62941449  237.3442088  240.7120689
##   mu_beta          6.1897151  0.10921358    5.9689288    6.1190184
##   sigmasq_y       37.1307557  5.65117216   27.3611873   33.2613774
##   sigmasq_alpha  217.5245422 62.55370370  126.1351352  170.1450736
##   sigmasq_beta     0.2704798  0.10169730    0.1254529    0.2030679
##   sigma_y          6.0761463  0.45979750    5.2307919    5.7672678
##   sigma_alpha     14.6062111  2.04629584   11.2309896   13.0439655
##   sigma_beta       0.5117069  0.09297588    0.3541929    0.4506306
##   alpha0         106.3024245  3.52542515   99.0851200  104.0627906
##   lp__          -437.9422746  6.96469341 -452.5664207 -442.5851673
##                stats
## parameter               50%          75%        97.5%
##   alpha[1]       239.879868  241.7792470  245.0922867
##   alpha[2]       247.818893  249.6545543  253.0766478
##   alpha[3]       252.383777  254.1624310  257.7258107
##   alpha[4]       232.480316  234.4694988  237.7142632
##   alpha[5]       231.650897  233.4492190  236.6261300
##   alpha[6]       249.826299  251.6229533  254.7817208
##   alpha[7]       228.849271  230.6241884  234.2050397
##   alpha[8]       248.405942  250.2425228  253.6463503
##   alpha[9]       283.431371  285.1403912  288.9079462
##   alpha[10]      219.182882  221.1034818  224.1198006
##   alpha[11]      258.270141  260.0634500  263.4392332
##   alpha[12]      228.093092  229.7887509  233.3291836
##   alpha[13]      242.498685  244.0687874  247.7222538
##   alpha[14]      268.417265  270.1335088  273.7682337
##   alpha[15]      242.804033  244.5922841  248.2080002
##   alpha[16]      245.254503  246.9830993  250.5793367
##   alpha[17]      232.102862  234.1010113  237.8241397
##   alpha[18]      240.313022  242.0445487  245.4404409
##   alpha[19]      253.807022  255.7874079  259.2944753
##   alpha[20]      241.483632  243.4160958  247.3198705
##   alpha[21]      248.572939  250.3101501  253.7881997
##   alpha[22]      225.164260  227.0134255  230.5216434
##   alpha[23]      228.605071  230.4496371  233.7221741
##   alpha[24]      245.175162  246.8092400  249.9381782
##   alpha[25]      234.517898  236.2634439  239.7557089
##   alpha[26]      253.907518  255.7720044  259.0744807
##   alpha[27]      254.440645  256.2416498  259.3059364
##   alpha[28]      242.893774  244.9369321  248.5238485
##   alpha[29]      217.824085  219.6402481  223.2027283
##   alpha[30]      241.479007  243.1399398  246.7231455
##   beta[1]          6.078187    6.2383717    6.5214337
##   beta[2]          7.031013    7.2102189    7.5466025
##   beta[3]          6.470269    6.6338087    6.9559815
##   beta[4]          5.357002    5.5213236    5.8859392
##   beta[5]          6.564889    6.7176634    7.0304905
##   beta[6]          6.190002    6.3379469    6.6301723
##   beta[7]          5.986635    6.1299667    6.4266673
##   beta[8]          6.402140    6.5763915    6.9100414
##   beta[9]          7.033315    7.2208969    7.5429233
##   beta[10]         5.840845    6.0030277    6.3033085
##   beta[11]         6.781050    6.9580698    7.2862568
##   beta[12]         6.121534    6.2778623    6.5746430
##   beta[13]         6.158695    6.3213722    6.6338210
##   beta[14]         6.708343    6.8598546    7.1911545
##   beta[15]         5.437599    5.5984419    5.9303236
##   beta[16]         5.937430    6.0903640    6.3752119
##   beta[17]         6.276339    6.4313819    6.7322425
##   beta[18]         5.849835    6.0016540    6.3343856
##   beta[19]         6.412204    6.5703356    6.8873194
##   beta[20]         6.065223    6.2282591    6.5095063
##   beta[21]         6.393358    6.5511882    6.8738453
##   beta[22]         5.851972    6.0167573    6.3123775
##   beta[23]         5.749334    5.9160245    6.2236651
##   beta[24]         5.880025    6.0390906    6.3313941
##   beta[25]         6.905456    7.0650970    7.4024805
##   beta[26]         6.548488    6.7233834    7.0292038
##   beta[27]         5.921891    6.0748489    6.3590642
##   beta[28]         5.843375    6.0126686    6.3601550
##   beta[29]         5.663809    5.8345518    6.1091530
##   beta[30]         6.134265    6.2848239    6.5937300
##   mu_alpha       242.568159  244.1238918  247.5197945
##   mu_beta          6.188517    6.2584363    6.4210782
##   sigmasq_y       36.609439   40.5166498   49.5635084
##   sigmasq_alpha  206.796166  250.4352537  359.4301276
##   sigmasq_beta     0.250146    0.3182033    0.5239610
##   sigma_y          6.050573    6.3652688    7.0401355
##   sigma_alpha     14.380409   15.8251462   18.9586403
##   sigma_beta       0.500146    0.5640951    0.7238515
##   alpha0         106.333400  108.7328521  113.2020324
##   lp__          -437.717895 -432.8859590 -425.8491977

0.1.4 eight schools example

# stan script
# Eight-schools hierarchical model in its non-centered ("eta")
# parameterization: eta ~ normal(0, 1) with theta = mu + tau * eta,
# so the school effects are built from unscaled deviations.
eightschools<-"
// saved as 8schools.stan
data {
  int<lower=0> J;         // number of schools 
  real y[J];              // estimated treatment effects
  real<lower=0> sigma[J]; // standard error of effect estimates 
}
parameters {
  real mu;                // population treatment effect
  real<lower=0> tau;      // standard deviation in treatment effects
  vector[J] eta;          // unscaled deviation from mu by school
}
transformed parameters {
  vector[J] theta = mu + tau * eta;        // school treatment effects
}
model {
  target += normal_lpdf(eta | 0, 1);       // prior log-density
  target += normal_lpdf(y | theta, sigma); // log-likelihood
}"
# Observed effect estimates and their standard errors for the 8 schools.
schools_dat <- list(J = 8, 
                    y = c(28,  8, -3,  7, -1,  1, 18, 12),
                    sigma = c(15, 10, 16, 11,  9, 11, 10, 18))
# Compile and sample with rstan defaults (per the output below: 4 chains,
# iter = 2000, warmup = 1000 each).
school.fit <- rstan::stan(model_code  = eightschools,
                         data = schools_dat)
## Warning: There were 3 divergent transitions after warmup. Increasing adapt_delta above 0.8 may help. See
## http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Warning: Examine the pairs() plot to diagnose sampling problems
# Posterior summary per parameter: mean, se_mean, sd, quantiles, n_eff, Rhat.
print(school.fit)
## Inference for Stan model: 432a5acc93a981154b518b22530ee9a6.
## 4 chains, each with iter=2000; warmup=1000; thin=1; 
## post-warmup draws per chain=1000, total post-warmup draws=4000.
## 
##            mean se_mean   sd   2.5%    25%    50%    75%  97.5% n_eff Rhat
## mu         7.87    0.13 5.27  -2.47   4.56   7.89  11.15  17.95  1542    1
## tau        6.51    0.17 5.45   0.25   2.54   5.24   9.12  20.12  1060    1
## eta[1]     0.42    0.01 0.93  -1.46  -0.20   0.42   1.04   2.25  3973    1
## eta[2]     0.01    0.01 0.89  -1.76  -0.56   0.02   0.60   1.80  4430    1
## eta[3]    -0.20    0.01 0.94  -2.03  -0.85  -0.21   0.44   1.69  4722    1
## eta[4]    -0.04    0.02 0.90  -1.91  -0.64  -0.04   0.54   1.78  3512    1
## eta[5]    -0.33    0.01 0.86  -2.00  -0.91  -0.36   0.23   1.42  3949    1
## eta[6]    -0.21    0.01 0.89  -1.93  -0.80  -0.23   0.37   1.55  4264    1
## eta[7]     0.34    0.01 0.89  -1.49  -0.24   0.36   0.93   2.07  3710    1
## eta[8]     0.05    0.02 0.97  -1.86  -0.62   0.05   0.72   1.90  3996    1
## theta[1]  11.40    0.15 8.27  -2.91   6.07  10.53  15.54  31.24  3175    1
## theta[2]   7.95    0.09 6.47  -5.00   3.80   7.85  11.90  20.94  4897    1
## theta[3]   6.19    0.14 7.90 -11.12   1.87   6.67  11.12  20.86  3397    1
## theta[4]   7.50    0.09 6.66  -5.39   3.43   7.52  11.46  21.37  5357    1
## theta[5]   5.28    0.09 6.29  -8.32   1.58   5.66   9.48  16.47  4386    1
## theta[6]   6.14    0.10 6.70  -8.03   2.05   6.50  10.56  18.73  4458    1
## theta[7]  10.62    0.11 6.77  -0.81   6.09   9.97  14.56  25.88  3625    1
## theta[8]   8.30    0.14 8.20  -8.05   3.57   8.21  12.80  25.24  3465    1
## lp__     -39.64    0.08 2.61 -45.30 -41.26 -39.47 -37.73 -35.15  1203    1
## 
## Samples were drawn using NUTS(diag_e) at Wed Jan  9 23:54:04 2019.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at 
## convergence, Rhat=1).
# Default stanfit plot: interval estimates (80% / 95% per the output below).
plot(school.fit)
## 'pars' not specified. Showing first 10 parameters by default.
## ci_level: 0.8 (80% intervals)
## outer_level: 0.95 (95% intervals)

# Pairwise scatter of selected parameters -- useful for spotting the
# mu/tau geometry behind the divergent-transition warnings above.
pairs(school.fit, pars = c("mu", "tau", "lp__"))

la <- rstan::extract(school.fit, permuted = TRUE) # return a list of arrays 
mu <- la$mu 

### return an array of three dimensions: iterations, chains, parameters 
a <- rstan::extract(school.fit, permuted = FALSE) 

### use S3 functions on stanfit objects
a2 <- as.array(school.fit)   # draws kept per chain
m <- as.matrix(school.fit)   # chains stacked into one draws x parameters matrix
d <- as.data.frame(school.fit)  # same draws as a data.frame

0.1.5 Generalised linear models in Stan

https://ourcodingclub.github.io/2018/04/30/stan-2.html

0.1.6 Bayesian Instrumental variables (with a hierarchical prior) in Stan

http://modernstatisticalworkflow.blogspot.com/2017/11/bayesian-instrumental-variables-with.html

0.1.7 Gaussian Process in Stan

Intro

The following demonstrates a Gaussian process using the Bayesian programming language Stan. I also have a pure R approach (1, 2), where you will find a little more context, and a Matlab implementation (1).

The R code for this demo is here, the Stan model code here. A primary reference is Rasmussen & Williams (2006).

Data and Parameter Setup. To start, we will need to generate some data. We'll have a simple x, y setup with a sample size of 20. We'll also create some test data to examine predictions later.

Data

# Simulated data: N noisy observations of y = sin(x), plus Ntest evenly
# spaced test inputs padded one unit beyond the observed range, used later
# for posterior predictions.
set.seed(1234)
N <- 20
Ntest <- 200
x <- rnorm(N, sd = 5)
y <- sin(x) + rnorm(N, sd = .1)
# Spell out `length.out` instead of relying on partial matching ("l = ...").
xtest <- seq(min(x) - 1, max(x) + 1, length.out = Ntest)
plot(x, y, pch = 19, col = '#ff5500')

Covariance function and parameters. In this demo we'll use the squared exponential kernel, which has three parameters to estimate.

# Hyperparameters for the squared exponential kernel defined below.
eta_sq   <- 1
rho_sq   <- 1
sigma_sq <- .1
# Covariance function same as implemented in the Stan code.
# Covariance function, same as implemented in the Stan code: the squared
# exponential kernel
#   Sigma[i, j] = eta_sq * exp(-rho_sq * (x[i] - x[j])^2)  for i != j
# with eta_sq + sigma_sq on the diagonal (noise variance acts as jitter).
#
# Vectorized with outer() instead of the original nested loops. This also
# fixes the length-one input case: `for (i in 1:(N-1))` with N = 1 iterated
# over c(1, 0) and indexed Sigma out of bounds.
#
# x        - numeric vector of input locations
# eta_sq   - marginal variance of the kernel
# rho_sq   - decay rate applied to squared distance
# sigma_sq - noise variance added on the diagonal
# Returns an length(x) x length(x) symmetric covariance matrix.
Kfn <- function (x, eta_sq, rho_sq, sigma_sq) {
  sq_dist <- outer(x, x, `-`)^2
  Sigma <- eta_sq * exp(-rho_sq * sq_dist)
  diag(Sigma) <- eta_sq + sigma_sq
  Sigma
}

Visualize the priors. With everything in place, we can look at some draws from the prior distribution.

# Three draws from the GP prior: zero-mean multivariate normal with
# covariance Kfn evaluated on a grid of inputs.
xinit = seq(-5,5,.2)
xprior = MASS::mvrnorm(3, 
                       mu=rep(0, length(xinit)), 
                       Sigma=Kfn(x=xinit,
                                 eta_sq = eta_sq, 
                                 rho_sq = rho_sq, 
                                 sigma_sq = sigma_sq))
library(reshape2)
## 
## Attaching package: 'reshape2'
## The following object is masked from 'package:tidyr':
## 
##     smiths
# Long form for plotting: one row per (x, draw) pair; `variable` labels
# which of the 3 prior draws the row belongs to.
gdat = melt(data.frame(x=xinit, y=t(xprior)), id='x')
library(ggvis)
## 
## Attaching package: 'ggvis'
## The following object is masked from 'package:ggplot2':
## 
##     resolution
# One semi-transparent path per prior draw.
gdat %>% 
  ggvis(~x, ~value) %>% 
  group_by(variable) %>% 
  layer_paths(strokeOpacity:=.5) %>% 
  add_axis('x', grid=F) %>% 
  add_axis('y', grid=F)

0.1.7.1 Stan Code and Data

Now we are ready for the Stan code. I’ve kept to the Stan manual section on Gaussian Processes (github).

# Stan model for the GP, kept close to the GP section of the (2.x) Stan
# manual. The Stan code uses some since-deprecated syntax that rstan 2.18
# still accepts: `<-` assignment (now `=`), `#` comments (now `//`), and
# if_else().
#
# NOTE(review): the generated-quantities block rebuilds Sigma/Omega/K with
# hard-coded hyperparameters (eta_sq = rho_sq = 1, jitter 0.1) rather than
# the sampled eta_sq/rho_sq/sigma_sq, so the posterior predictive draws
# ignore the estimated kernel parameters -- confirm this is intended.
gp <- "
data {
  int<lower=1> N;                                # initial sample size
  vector[N] x;                                   # covariate
  vector[N] y;                                   # target
  int<lower=0> Ntest;                            # prediction set sample size
  vector[Ntest] xtest;                           # prediction values for covariate
}
transformed data {
  vector[N] mu;
  
  mu <- rep_vector(0, N);                        # mean function
}
parameters {
  real<lower=0> eta_sq;                          # parameters of squared exponential covariance function
  real<lower=0> inv_rho_sq;
  real<lower=0> sigma_sq;
}
transformed parameters {
  real<lower=0> rho_sq;
  rho_sq <- inv(inv_rho_sq);
}
model {
  matrix[N,N] Sigma;
  # off-diagonal elements for covariance matrix
  for (i in 1:(N-1)) {
    for (j in (i+1):N) {
      Sigma[i,j] <- eta_sq * exp(-rho_sq * pow(x[i] - x[j],2));
      Sigma[j,i] <- Sigma[i,j];
    }
  }
  # diagonal elements
  for (k in 1:N)
    Sigma[k,k] <- eta_sq + sigma_sq;             # + jitter for pos def
  # priors
  eta_sq ~ cauchy(0,5);
  inv_rho_sq ~ cauchy(0,5);
  sigma_sq ~ cauchy(0,5);
  # sampling distribution
  y ~ multi_normal(mu,Sigma);
}
generated quantities {
  vector[Ntest] muTest;                          # The following produces the posterior predictive draws
  vector[Ntest] yRep;                            # see GP section of Stan man- 'Analytical Form...'
  matrix[Ntest,Ntest] L;
  {
  matrix[N,N] Sigma;
  matrix[Ntest,Ntest] Omega;
  matrix[N,Ntest] K;
  matrix[Ntest,N] K_transpose_div_Sigma;
  matrix[Ntest,Ntest] Tau;
  # Sigma
  for (i in 1:N)
    for (j in 1:N)
      Sigma[i,j] <- exp(-pow(x[i] - x[j],2)) + if_else(i==j, 0.1, 0.0);
  # Omega
  for (i in 1:Ntest)
    for (j in 1:Ntest)
      Omega[i,j] <- exp(-pow(xtest[i] - xtest[j],2)) + if_else(i==j, 0.1, 0.0);
  # K
  for (i in 1:N)
    for (j in 1:Ntest)
      K[i,j] <- exp(-pow(x[i] - xtest[j],2));
  K_transpose_div_Sigma <- K' / Sigma;
  muTest <- K_transpose_div_Sigma * y;
  Tau <- Omega - K_transpose_div_Sigma * K;
  for (i in 1:(Ntest-1))
    for (j in (i+1):Ntest)
      Tau[i,j] <- Tau[j,i];
  L <- cholesky_decompose(Tau);
  }
  yRep <- multi_normal_cholesky_rng(muTest, L);
}
"

0.1.7.2 Model Fitting and Summary

Compile check. When using Stan it's good to do a very brief compile check before getting too far into debugging or the primary model fit. You don't want to waste more time than necessary just to find out whether the code works at all.

# Bundle the data for Stan. Pass Ntest = Ntest (bound with the simulated
# data above) instead of repeating the literal 200, so the test-grid size
# and the data list cannot drift apart.
standata <- list(N = N, x = x, y = y, xtest = xtest, Ntest = Ntest)
library(rstan)
# One-iteration, one-chain run as a cheap compile check: it surfaces Stan
# syntax or data-shape errors before the expensive full fit below.
fit0 <- stan(data = standata, model_code = gp, iter = 1, chains = 1)

Primary fit Now we can do the main model fit. With the following setup it took about 2 minutes on my machine. The fit object is almost 1 gig though, so will not be investigated here except visually in terms of the posterior predictive draws produced in the generated quantities section of the Stan code.

# Sampler settings: 4 chains, each keeping (iterations - warmup) / thin
# = 500 post-warmup draws.
iterations <- 12000
wu <- 2000   # warmup iterations
th <- 20     # thinning interval
chains <- 4
library(parallel)
# Size the cluster from `chains` (was hard-coded 4) so changing the chain
# count above is the only edit needed.
cl <- makeCluster(chains)
clusterExport(cl, c('gp', 'standata', 'fit0', 'iterations', 'wu', 'th', 'chains'))
clusterEvalQ(cl, library(rstan))
p <- proc.time()
# One single-chain stan() call per worker; `fit = fit0` reuses the model
# compiled in the check above instead of recompiling on each worker.
fit <- parLapply(cl, seq_len(chains),
                 function(chain) stan(data = standata, model_code = gp,
                                      iter = iterations, warmup = wu,
                                      thin = th, chains = 1, chain_id = chain,
                                      fit = fit0))
(proc.time() - p) / 3600  # elapsed time in hours
stopCluster(cl)

Visualize Posterior Predictive

# The full fit object is large (~1 GB per the text above), so the posterior
# predictive draws were extracted once (commented line below) and cached;
# load() is expected to supply `yRep` and `yRepMean` from stangp.RData.
## #yRep = rstan::extract(fit, 'yRep')$yRep
load('stangp.RData'); suppressPackageStartupMessages(require(rstan))
# Overlay 3 randomly chosen posterior predictive curves on the observed
# points (gdat comes from the prior-draw section earlier).
gdat2 = melt(data.frame(x = sort(xtest), y=t(yRep[sample(2000, 3),])), id='x')
gdat2 %>% 
  ggvis(~x, ~value) %>% 
  group_by(variable) %>% 
  layer_paths(strokeOpacity:=.25) %>% 
  layer_points(x=~x, y=~y, fill:='#ff5500', data=gdat) %>% 
  add_axis('x', grid=F) %>% 
  add_axis('y', grid=F)
# Visualize fit
# yRepMean is the posterior predictive mean at each test point; column 5 of
# get_posterior_mean() is presumably the all-chains average -- verify.
#yRepMean = get_posterior_mean(fit, 'yRep')[,5]
gdat3 = data.frame(x = sort(xtest), y=yRepMean)
gdat3 %>% 
  ggvis(~x, ~y) %>% 
  layer_paths(strokeOpacity:=.5, stroke:='blue') %>% 
  layer_points(x=~x, y=~y, fill:='#ff5500', data=gdat) %>% 
  add_axis('x', grid=F) %>% 
  add_axis('y', grid=F)